#install.packages("caret", dependencies = c("Depends", "Suggests"))
library(ggplot2)
library(mlbench)
library(caret)
## Loading required package: lattice
library(tabplot)
## Loading required package: bit
## Attaching package bit
## package:bit (c) 2008-2012 Jens Oehlschlaegel (GPL-2)
## creators: bit bitwhich
## coercion: as.logical as.integer as.bit as.bitwhich which
## operator: ! & | xor != ==
## querying: print length any all min max range sum summary
## bit access: length<- [ [<- [[ [[<-
## for more help type ?bit
## 
## Attaching package: 'bit'
## The following object is masked from 'package:base':
## 
##     xor
## Loading required package: ff
## Attaching package ff
## - getOption("fftempdir")=="C:/Users/admin/AppData/Local/Temp/Rtmpu8STbR"
## - getOption("ffextension")=="ff"
## - getOption("ffdrop")==TRUE
## - getOption("fffinonexit")==TRUE
## - getOption("ffpagesize")==65536
## - getOption("ffcaching")=="mmnoflush"  -- consider "ffeachflush" if your system stalls on large writes
## - getOption("ffbatchbytes")==170802544.64 -- consider a different value for tuning your system
## - getOption("ffmaxbytes")==8540127232 -- consider a different value for tuning your system
## 
## Attaching package: 'ff'
## The following objects are masked from 'package:bit':
## 
##     clone, clone.default, clone.list
## The following objects are masked from 'package:utils':
## 
##     write.csv, write.csv2
## The following objects are masked from 'package:base':
## 
##     is.factor, is.ordered
## Loading required package: ffbase
## 
## Attaching package: 'ffbase'
## The following objects are masked from 'package:ff':
## 
##     [.ff, [.ffdf, [<-.ff, [<-.ffdf
## The following objects are masked from 'package:base':
## 
##     %in%, table
## Standard deviations are plot by default. See argument numMode of plot.tabplot.
library(lattice)
library(plotly)
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
# Sonar data (from mlbench): 208 rows, 60 numeric predictors V1-V60 and a
# two-level Class factor (M: 111, R: 97 — see summary below).
data(Sonar)
# Render the column-wise summary as a markdown table (for the knitted report).
knitr::kable(summary(Sonar))
## V1 V2 V3 V4 V5 V6 V7 V8 V9 V10 V11 V12 V13 V14 V15 V16 V17 V18 V19 V20 V21 V22 V23 V24 V25 V26 V27 V28 V29 V30 V31 V32 V33 V34 V35 V36 V37 V38 V39 V40 V41 V42 V43 V44 V45 V46 V47 V48 V49 V50 V51 V52 V53 V54 V55 V56 V57 V58 V59 V60 Class
## Min. :0.00150 Min. :0.00060 Min. :0.00150 Min. :0.00580 Min. :0.00670 Min. :0.01020 Min. :0.0033 Min. :0.00550 Min. :0.00750 Min. :0.0113 Min. :0.0289 Min. :0.0236 Min. :0.0184 Min. :0.0273 Min. :0.0031 Min. :0.0162 Min. :0.0349 Min. :0.0375 Min. :0.0494 Min. :0.0656 Min. :0.0512 Min. :0.0219 Min. :0.0563 Min. :0.0239 Min. :0.0240 Min. :0.0921 Min. :0.0481 Min. :0.0284 Min. :0.0144 Min. :0.0613 Min. :0.0482 Min. :0.0404 Min. :0.0477 Min. :0.0212 Min. :0.0223 Min. :0.0080 Min. :0.0351 Min. :0.0383 Min. :0.0371 Min. :0.0117 Min. :0.0360 Min. :0.0056 Min. :0.0000 Min. :0.0000 Min. :0.00000 Min. :0.00000 Min. :0.00000 Min. :0.00000 Min. :0.00000 Min. :0.00000 Min. :0.000000 Min. :0.000800 Min. :0.000500 Min. :0.001000 Min. :0.00060 Min. :0.000400 Min. :0.00030 Min. :0.000300 Min. :0.000100 Min. :0.000600 M:111
## 1st Qu.:0.01335 1st Qu.:0.01645 1st Qu.:0.01895 1st Qu.:0.02438 1st Qu.:0.03805 1st Qu.:0.06703 1st Qu.:0.0809 1st Qu.:0.08042 1st Qu.:0.09703 1st Qu.:0.1113 1st Qu.:0.1293 1st Qu.:0.1335 1st Qu.:0.1661 1st Qu.:0.1752 1st Qu.:0.1646 1st Qu.:0.1963 1st Qu.:0.2059 1st Qu.:0.2421 1st Qu.:0.2991 1st Qu.:0.3506 1st Qu.:0.3997 1st Qu.:0.4069 1st Qu.:0.4502 1st Qu.:0.5407 1st Qu.:0.5258 1st Qu.:0.5442 1st Qu.:0.5319 1st Qu.:0.5348 1st Qu.:0.4637 1st Qu.:0.4114 1st Qu.:0.3456 1st Qu.:0.2814 1st Qu.:0.2579 1st Qu.:0.2176 1st Qu.:0.1794 1st Qu.:0.1543 1st Qu.:0.1601 1st Qu.:0.1743 1st Qu.:0.1740 1st Qu.:0.1865 1st Qu.:0.1631 1st Qu.:0.1589 1st Qu.:0.1552 1st Qu.:0.1269 1st Qu.:0.09448 1st Qu.:0.06855 1st Qu.:0.06425 1st Qu.:0.04512 1st Qu.:0.02635 1st Qu.:0.01155 1st Qu.:0.008425 1st Qu.:0.007275 1st Qu.:0.005075 1st Qu.:0.005375 1st Qu.:0.00415 1st Qu.:0.004400 1st Qu.:0.00370 1st Qu.:0.003600 1st Qu.:0.003675 1st Qu.:0.003100 R: 97
## Median :0.02280 Median :0.03080 Median :0.03430 Median :0.04405 Median :0.06250 Median :0.09215 Median :0.1070 Median :0.11210 Median :0.15225 Median :0.1824 Median :0.2248 Median :0.2490 Median :0.2640 Median :0.2811 Median :0.2817 Median :0.3047 Median :0.3084 Median :0.3683 Median :0.4350 Median :0.5425 Median :0.6177 Median :0.6649 Median :0.6997 Median :0.6985 Median :0.7211 Median :0.7545 Median :0.7456 Median :0.7319 Median :0.6808 Median :0.6071 Median :0.4904 Median :0.4296 Median :0.3912 Median :0.3510 Median :0.3127 Median :0.3211 Median :0.3063 Median :0.3127 Median :0.2835 Median :0.2781 Median :0.2595 Median :0.2451 Median :0.2225 Median :0.1777 Median :0.14800 Median :0.12135 Median :0.10165 Median :0.07810 Median :0.04470 Median :0.01790 Median :0.013900 Median :0.011400 Median :0.009550 Median :0.009300 Median :0.00750 Median :0.006850 Median :0.00595 Median :0.005800 Median :0.006400 Median :0.005300 NA
## Mean :0.02916 Mean :0.03844 Mean :0.04383 Mean :0.05389 Mean :0.07520 Mean :0.10457 Mean :0.1217 Mean :0.13480 Mean :0.17800 Mean :0.2083 Mean :0.2360 Mean :0.2502 Mean :0.2733 Mean :0.2966 Mean :0.3202 Mean :0.3785 Mean :0.4160 Mean :0.4523 Mean :0.5048 Mean :0.5630 Mean :0.6091 Mean :0.6243 Mean :0.6470 Mean :0.6727 Mean :0.6754 Mean :0.6999 Mean :0.7022 Mean :0.6940 Mean :0.6421 Mean :0.5809 Mean :0.5045 Mean :0.4390 Mean :0.4172 Mean :0.4032 Mean :0.3926 Mean :0.3848 Mean :0.3638 Mean :0.3397 Mean :0.3258 Mean :0.3112 Mean :0.2893 Mean :0.2783 Mean :0.2465 Mean :0.2141 Mean :0.19723 Mean :0.16063 Mean :0.12245 Mean :0.09142 Mean :0.05193 Mean :0.02042 Mean :0.016069 Mean :0.013420 Mean :0.010709 Mean :0.010941 Mean :0.00929 Mean :0.008222 Mean :0.00782 Mean :0.007949 Mean :0.007941 Mean :0.006507 NA
## 3rd Qu.:0.03555 3rd Qu.:0.04795 3rd Qu.:0.05795 3rd Qu.:0.06450 3rd Qu.:0.10028 3rd Qu.:0.13412 3rd Qu.:0.1540 3rd Qu.:0.16960 3rd Qu.:0.23342 3rd Qu.:0.2687 3rd Qu.:0.3016 3rd Qu.:0.3312 3rd Qu.:0.3513 3rd Qu.:0.3862 3rd Qu.:0.4529 3rd Qu.:0.5357 3rd Qu.:0.6594 3rd Qu.:0.6791 3rd Qu.:0.7314 3rd Qu.:0.8093 3rd Qu.:0.8170 3rd Qu.:0.8320 3rd Qu.:0.8486 3rd Qu.:0.8722 3rd Qu.:0.8737 3rd Qu.:0.8938 3rd Qu.:0.9171 3rd Qu.:0.9003 3rd Qu.:0.8521 3rd Qu.:0.7352 3rd Qu.:0.6420 3rd Qu.:0.5803 3rd Qu.:0.5561 3rd Qu.:0.5961 3rd Qu.:0.5934 3rd Qu.:0.5565 3rd Qu.:0.5189 3rd Qu.:0.4405 3rd Qu.:0.4349 3rd Qu.:0.4244 3rd Qu.:0.3875 3rd Qu.:0.3842 3rd Qu.:0.3245 3rd Qu.:0.2717 3rd Qu.:0.23155 3rd Qu.:0.20037 3rd Qu.:0.15443 3rd Qu.:0.12010 3rd Qu.:0.06853 3rd Qu.:0.02527 3rd Qu.:0.020825 3rd Qu.:0.016725 3rd Qu.:0.014900 3rd Qu.:0.014500 3rd Qu.:0.01210 3rd Qu.:0.010575 3rd Qu.:0.01043 3rd Qu.:0.010350 3rd Qu.:0.010325 3rd Qu.:0.008525 NA
## Max. :0.13710 Max. :0.23390 Max. :0.30590 Max. :0.42640 Max. :0.40100 Max. :0.38230 Max. :0.3729 Max. :0.45900 Max. :0.68280 Max. :0.7106 Max. :0.7342 Max. :0.7060 Max. :0.7131 Max. :0.9970 Max. :1.0000 Max. :0.9988 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :0.9657 Max. :0.9306 Max. :1.0000 Max. :0.9647 Max. :1.0000 Max. :1.0000 Max. :0.9497 Max. :1.0000 Max. :0.9857 Max. :0.9297 Max. :0.8995 Max. :0.8246 Max. :0.7733 Max. :0.7762 Max. :0.70340 Max. :0.72920 Max. :0.55220 Max. :0.33390 Max. :0.19810 Max. :0.08250 Max. :0.100400 Max. :0.070900 Max. :0.039000 Max. :0.035200 Max. :0.04470 Max. :0.039400 Max. :0.03550 Max. :0.044000 Max. :0.036400 Max. :0.043900 NA
set.seed(23)
# Stratified 75/25 split of Sonar into training and test sets.
inTraining <- 
    createDataPartition(
        # attribute used for stratification
        y = Sonar$Class,
        # fraction of rows assigned to the training set
        p = .75,
        # we want a matrix of indices, not a list
        list = FALSE)

training <- Sonar[ inTraining,]
testing  <- Sonar[-inTraining,]
# Density of V1 by class.
# FIX: refer to the column as `Class` inside aes() (never data$column),
# and set the constant alpha outside aes() — mapping a constant inside
# aes() creates a spurious legend instead of applying the transparency.
ggplot(Sonar, aes(V1, fill = Class)) +
    geom_density(alpha = 0.25)

# Sanity check: the V1 distribution should look similar in all three sets.
hist(Sonar$V1)

hist(training$V1)

hist(testing$V1)

# FIX: use library() for loading — require() only returns FALSE on failure,
# silently masking a missing package. (ggplot2 is already attached above.)
library(ggplot2)
#data(Sonar)

# Column-wise overview plots of the full data and both partitions.
tableplot(Sonar)

#data(training)

tableplot(training)

#data(testing)
tableplot(testing)
## Warning in tableplot_checkBins(nBins, max(N, 2)): Setting nBins (100) to
## number of rows (51)

# FIX: barplot(as.matrix(df)) coerced the factor column `Class` to NA —
# the original run warned "NAs introduced by coercion" (in Polish).
# Restrict the matrix to the numeric columns so no coercion happens.
barplot(as.matrix(Sonar[sapply(Sonar, is.numeric)]), main="Sonar")

barplot(as.matrix(training[sapply(training, is.numeric)]), main="training")

barplot(as.matrix(testing[sapply(testing, is.numeric)]), main="testing")

# Resampling scheme for caret::train() below.
ctrl <- trainControl(
    # repeated cross-validation
    method = "repeatedcv",
    # number of folds
    number = 2,
    # number of repeats
    repeats = 5)
#knitr::kable(summary(ctrl))
#head(ctrl)
set.seed(23)
# Random forest on all 60 predictors; caret tunes mtry over its default grid.
fit <- train(Class ~ .,
             data = training,
             method = "rf",
             trControl = ctrl,
             # parameter passed through to the underlying learner (randomForest)
             ntree = 10)
## Loading required package: randomForest
## randomForest 4.6-12
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## The following object is masked from 'package:ggplot2':
## 
##     margin
fit
## Random Forest 
## 
## 157 samples
##  60 predictor
##   2 classes: 'M', 'R' 
## 
## No pre-processing
## Resampling: Cross-Validated (2 fold, repeated 5 times) 
## Summary of sample sizes: 79, 78, 79, 78, 79, 78, ... 
## Resampling results across tuning parameters:
## 
##   mtry  Accuracy   Kappa    
##    2    0.7312074  0.4556372
##   31    0.7427296  0.4801365
##   60    0.7210808  0.4375965
## 
## Accuracy was used to select the optimal model using  the largest value.
## The final value used for the model was mtry = 31.
# Hard class predictions for the held-out test set.
rfClasses <- predict(fit, newdata = testing)
# Confusion matrix plus accuracy/kappa/sensitivity etc. against the truth.
confusionMatrix(data = rfClasses, testing$Class)
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  M  R
##          M 23  8
##          R  4 16
##                                           
##                Accuracy : 0.7647          
##                  95% CI : (0.6251, 0.8721)
##     No Information Rate : 0.5294          
##     P-Value [Acc > NIR] : 0.0004667       
##                                           
##                   Kappa : 0.5234          
##  Mcnemar's Test P-Value : 0.3864762       
##                                           
##             Sensitivity : 0.8519          
##             Specificity : 0.6667          
##          Pos Pred Value : 0.7419          
##          Neg Pred Value : 0.8000          
##              Prevalence : 0.5294          
##          Detection Rate : 0.4510          
##    Detection Prevalence : 0.6078          
##       Balanced Accuracy : 0.7593          
##                                           
##        'Positive' Class : M               
## 
# Explicit tuning grid: try every mtry from 10 to 30.
rfGrid <- expand.grid(mtry = 10:30)
# twoClassSummary reports ROC/Sens/Spec and needs class probabilities.
gridCtrl <- trainControl(
    method = "repeatedcv",
    summaryFunction = twoClassSummary,
    classProbs = TRUE,
    number = 2,
    repeats = 5)

# Same seed as the first fit so the resampling folds are comparable.
set.seed(23)
fitTune <- train(Class ~ .,
             data = training,
             method = "rf",
             metric = "ROC",
             preProc = c("center", "scale"),
             trControl = gridCtrl,
             tuneGrid = rfGrid,
             ntree = 30)

fitTune
## Random Forest 
## 
## 157 samples
##  60 predictor
##   2 classes: 'M', 'R' 
## 
## Pre-processing: centered (60), scaled (60) 
## Resampling: Cross-Validated (2 fold, repeated 5 times) 
## Summary of sample sizes: 79, 78, 79, 78, 79, 78, ... 
## Resampling results across tuning parameters:
## 
##   mtry  ROC        Sens       Spec     
##   10    0.8521727  0.8428571  0.6798799
##   11    0.8491527  0.8238095  0.6741742
##   12    0.8504907  0.8190476  0.6854354
##   13    0.8633741  0.8428571  0.7020270
##   14    0.8461855  0.8357143  0.6801802
##   15    0.8378933  0.8261905  0.6416667
##   16    0.8482313  0.8333333  0.6470721
##   17    0.8477361  0.8428571  0.6577327
##   18    0.8395771  0.8142857  0.6662162
##   19    0.8409347  0.8190476  0.6636637
##   20    0.8489043  0.8309524  0.6740991
##   21    0.8496059  0.8166667  0.6988739
##   22    0.8273077  0.8047619  0.6499249
##   23    0.8480811  0.8476190  0.6692943
##   24    0.8461962  0.8404762  0.6772523
##   25    0.8480775  0.8261905  0.6659159
##   26    0.8461479  0.8261905  0.6713964
##   27    0.8421225  0.8261905  0.6526276
##   28    0.8375617  0.8285714  0.6603604
##   29    0.8368949  0.8476190  0.6739489
##   30    0.8380005  0.8285714  0.6798048
## 
## ROC was used to select the optimal model using  the largest value.
## The final value used for the model was mtry = 13.
# Tuning profile: resampled ROC as a function of mtry.
ggplot(fitTune) + theme_bw()

# Test-set evaluation of the tuned model.
rfTuneClasses <- predict(fitTune,
                         newdata = testing)
confusionMatrix(data = rfTuneClasses, 
                testing$Class)
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  M  R
##          M 24  7
##          R  3 17
##                                           
##                Accuracy : 0.8039          
##                  95% CI : (0.6688, 0.9018)
##     No Information Rate : 0.5294          
##     P-Value [Acc > NIR] : 4.341e-05       
##                                           
##                   Kappa : 0.6028          
##  Mcnemar's Test P-Value : 0.3428          
##                                           
##             Sensitivity : 0.8889          
##             Specificity : 0.7083          
##          Pos Pred Value : 0.7742          
##          Neg Pred Value : 0.8500          
##              Prevalence : 0.5294          
##          Detection Rate : 0.4706          
##    Detection Prevalence : 0.6078          
##       Balanced Accuracy : 0.7986          
##                                           
##        'Positive' Class : M               
## 
library(pROC)
## Type 'citation("pROC")' for a citation.
## 
## Attaching package: 'pROC'
## The following objects are masked from 'package:stats':
## 
##     cov, smooth, var
# Class probabilities (columns "M" and "R") instead of hard labels.
rfTuneProbs <- predict(fitTune, 
                       newdata = testing,
                       type="prob")
# ROC curve for the probability of class "M"; rev(levels()) passes the
# levels as c("R", "M"), i.e. "R" is the control level and "M" the case.
rocCurve <- roc(response = testing$Class,
                predictor = rfTuneProbs[, "M"],
                levels = rev(levels(testing$Class)))
plot(rocCurve)

## 
## Call:
## roc.default(response = testing$Class, predictor = rfTuneProbs[,     "M"], levels = rev(levels(testing$Class)))
## 
## Data: rfTuneProbs[, "M"] in 24 controls (testing$Class R) < 27 cases (testing$Class M).
## Area under the curve: 0.8889
# The same scatter plot in three plotting systems.
# Base graphics: stopping distance vs speed.
with(cars, plot(speed, dist))

state <- data.frame(state.x77, region = state.region)
# Lattice: life expectancy vs income, one panel per region.
xyplot(Life.Exp ~ Income | region, data = state, layout = c(4, 1))

# FIX: qplot() is deprecated in ggplot2 >= 3.4; use the equivalent
# ggplot() call, which draws the same scatter plot.
ggplot(cars, aes(speed, dist)) + geom_point()

# Build a plot from primitive components: explicit coordinate system,
# scales, faceting by cut, and a hand-built layer() instead of geom_point().
p <- ggplot() + 
coord_cartesian() +
scale_x_continuous()+
scale_y_continuous()+
scale_color_hue() +
facet_wrap(~cut) +
# one point per diamond, jittered to reduce overplotting
layer(data=diamonds, 
  mapping=aes(
      x=carat, 
      y=price,
      color=color), 
  stat="identity", 
  geom="point", 
  position=
   position_jitter()
)
p

# Add a quadratic GLM smooth as a second hand-built layer; the entries of
# `params` are forwarded to the "smooth" stat.
p + layer(
 data=diamonds,
 mapping=aes(
   x=carat,
   y=price),
 stat="smooth",
 params=list(
   method="glm",
   formula=
     y~poly(x,2),
   color="black"),
 geom="smooth",
 position=
  position_identity()
)

# Stopping distance vs speed, one colour per distinct speed*dist value.
ggplot(cars, aes(x = speed, y = dist, color = factor(speed * dist))) +
    geom_point()

# Diamonds: carat against price, coloured by colour grade...
ggplot(diamonds, aes(x = price, y = carat, color = factor(color))) +
    geom_point()

# ...and with the axes swapped.
ggplot(diamonds, aes(x = carat, y = price, color = factor(color))) +
    geom_point()

# Engine displacement vs highway mileage, coloured by cylinder count,
# with points and connecting lines per group.
ggplot(mpg, aes(x = displ, y = hwy, color = factor(cyl))) +
    geom_point() +
    geom_line()

# Identity bars: one bar of height hwy at each displ, outlined by cyl.
ggplot(mpg, aes(x = displ, y = hwy, color = factor(cyl))) +
    geom_bar(stat = "identity", position = "identity")

# All points, but linear fits computed only for the non-5-cylinder cars.
ggplot(mpg, aes(x = displ, y = hwy, color = factor(cyl))) +
    geom_point() +
    geom_smooth(data = subset(mpg, cyl != 5), method = "lm")

# Default (loess) smooth, one panel per model year.
ggplot(mpg, aes(x = displ, y = hwy)) +
    geom_point() +
    geom_smooth() +
    facet_grid(. ~ year)
## `geom_smooth()` using method = 'loess'

head(mpg)
## # A tibble: 6 × 11
##   manufacturer model displ  year   cyl      trans   drv   cty   hwy    fl
##          <chr> <chr> <dbl> <int> <int>      <chr> <chr> <int> <int> <chr>
## 1         audi    a4   1.8  1999     4   auto(l5)     f    18    29     p
## 2         audi    a4   1.8  1999     4 manual(m5)     f    21    29     p
## 3         audi    a4   2.0  2008     4 manual(m6)     f    20    31     p
## 4         audi    a4   2.0  2008     4   auto(av)     f    21    30     p
## 5         audi    a4   2.8  1999     6   auto(l5)     f    16    26     p
## 6         audi    a4   2.8  1999     6 manual(m5)     f    18    26     p
## # ... with 1 more variables: class <chr>
# Count of cars per highway-mpg value, stacked by drive train.
ggplot(mpg, aes(hwy, fill=drv)) + 
    geom_bar()

# FIX: alpha is a constant, so it belongs outside aes(); mapping it inside
# aes() produces a spurious legend entry instead of setting transparency.
ggplot(mpg, aes(hwy, fill = drv)) +
    geom_density(alpha = 0.25)

# Same densities with the black-and-white theme.
ggplot(mpg, aes(hwy, fill = drv)) +
    geom_density(alpha = 0.25) + theme_bw()

# Toy data: x = 1:10 and the letters a-e recycled into a 10-element factor.
df <- data.frame(x = 1:10, y = factor(letters[1:5]))
# Map x to position and to point size...
ggplot(df, aes(x, x, size = x)) + geom_point()

df <- data.frame(x = 1:10, y = factor(letters[1:5]))
# ...to a continuous colour scale...
ggplot(df, aes(x, x, color = x)) + geom_point()

df <- data.frame(x = 1:10, y = factor(letters[1:5]))
# ...and map the factor y to shape and to discrete colour.
ggplot(df, aes(y, y, shape = y)) + geom_point()

df <- data.frame(x = 1:10, y = factor(letters[1:5]))
ggplot(df, aes(y, y, color = y)) + geom_point()

df <- data.frame(x = 1:10, y = factor(letters[1:5]))
# Same plot under log10-transformed coordinates.
ggplot(df, aes(y, y, color = y)) + geom_point() + coord_trans(x = "log10", y = "log10")

df <- data.frame(x = 1:10, y = factor(letters[1:5]))
# ...and in polar coordinates with y as the angle.
ggplot(df, aes(y, y, color = y)) + geom_point() + coord_polar(theta = "x")

# Five points joined in order, drawn as filled circles labelled by index.
d <- data.frame(height = c(1, 2, 2, 3, 4), weight = c(1, 3, 4, 4, 2))
p <- ggplot() +
    geom_line(data = d, mapping = aes(x = height, y = weight)) +
    geom_point(data = d, mapping = aes(x = height, y = weight),
               size = 8, fill = "white", shape = 21) +
    geom_text(data = d,
              mapping = aes(x = height, y = weight, label = seq_len(nrow(d))))

p

# Axes swapped.
p + coord_flip()

# Log-log axes.
p + coord_trans(x = "log10", y = "log10")

# One data unit on x equals one data unit on y.
p + coord_equal()

# Polar coordinates with height as the angle.
p + coord_polar(theta = "x")

# FIX: library() fails fast if maps is missing (require() only warns),
# and the identical coordinate frame only needs to be built once, not four
# times. Also spell out FALSE instead of the reassignable shorthand F.
library(maps)
## Loading required package: maps
d <- data.frame(map(database = "italy", plot = FALSE)[c("x", "y")])

# Outline of Italy under a map projection.
ggplot() + coord_map() +
geom_polygon(data = d, mapping = aes(x = x, y = y), fill = "red", color = "black", size = 0.2)

# NOTE(review): each plot below adds a second coordinate system, so ggplot2
# replaces the earlier coord_map() ("Coordinate system already present");
# only the final coord_* takes effect.
ggplot() + coord_map() +
geom_polygon(data = d, mapping = aes(x = x, y = y), fill = "red", color = "black", size = 0.2) + coord_trans(x = "log10", y = "log10")

ggplot() + coord_map() +
geom_polygon(data = d, mapping = aes(x = x, y = y), fill = "red", color = "black", size = 0.2) + coord_equal()

ggplot() + coord_map() +
geom_polygon(data = d, mapping = aes(x = x, y = y), fill = "red", color = "black", size = 0.2) + coord_polar(theta = "x")

# Identity bars of hwy at each displ, filled by cylinder count; the same
# plot is then redrawn with different fill scales.
p <- ggplot(data = mpg, aes(x = displ, y = hwy, fill = factor(cyl))) +
    geom_bar(stat = "identity", position = "identity")
p

# Default (sequential) Brewer palette.
p + scale_fill_brewer()

# Qualitative Brewer palette.
p + scale_fill_brewer(palette = "Set1")

# Diverging Brewer palette.
p + scale_fill_brewer(palette = "Spectral")

# Manually chosen colours, by name...
p + scale_fill_manual(values = c("red", "blue", "green", "orange"))

# ...and by hex code.
p + scale_fill_manual(values = c("#CC6666", "#9999CC", "#66CC99", "#ffa500"))

# Random sample of 1000 diamonds to keep the interactive plot responsive.
dsamp <- diamonds[sample(nrow(diamonds),1000),]

p <- ggplot(dsamp, aes(carat, price,
                       color=clarity)) + 
    geom_point()
# Convert the static ggplot into an interactive plotly widget.
ggplotly(p)
# Overlay a quadratic GLM smooth built with the low-level layer() API;
# the `params` entries are forwarded to the "smooth" stat.
p + layer(
 data=dsamp,
 mapping=aes(
   x=carat,
   y=price),
 stat="smooth",
 params=list(
   method="glm",
   formula=
     y~poly(x,2),
   color="black"),
 geom="smooth",
 position=
  position_identity()
)

## Note that the echo = FALSE parameter was added to the code chunk to prevent printing of the R code that generated the plot.